def pca(X):
    """Principal component analysis of a zero-centered data matrix.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Data matrix, assumed already centered (column means == 0).

    Returns
    -------
    X_pca : ndarray of shape (n_samples, n_features)
        ``X`` projected onto the principal axes, ordered by
        decreasing explained variance.
    eigen_vecs : ndarray of shape (n_features, n_features)
        Orthonormal principal axes as columns, same ordering.

    Raises
    ------
    ValueError
        If ``X`` is not zero-centered.
    """
    n, m = X.shape
    # Explicit validation instead of `assert` (asserts vanish under -O).
    if not np.allclose(X.mean(axis=0), np.zeros(m)):
        raise ValueError("X must be zero-centered (column means must be 0)")
    # Sample covariance matrix (unbiased, hence n-1).
    C = np.dot(X.T, X) / (n - 1)
    # C is symmetric: use eigh, which guarantees real eigenvalues and
    # returns them in ascending order (np.linalg.eig may emit complex
    # values from round-off and orders eigenpairs arbitrarily).
    eigen_vals, eigen_vecs = np.linalg.eigh(C)
    # Reorder to the PCA convention: largest variance first.
    order = np.argsort(eigen_vals)[::-1]
    eigen_vecs = eigen_vecs[:, order]
    # Project the data onto the principal axes.
    X_pca = np.dot(X, eigen_vecs)
    return X_pca, eigen_vecs
Assuming that the control has the same dimensions as the observations, then if we are using a local slope model we have \(B \in \mathbb{R}^{state \times contr}\): \[ B = \begin{bmatrix} -I & I \\ 0 & 0 \end{bmatrix}\]
/home/simone/anaconda3/envs/data-science/lib/python3.10/site-packages/fastai/callback/core.py:69: UserWarning: You are shadowing an attribute (__class__) that exists in the learner. Use `self.learn.__class__` to avoid this
warn(f"You are shadowing an attribute ({name}) that exists in the learner. Use `self.learn.{name}` to avoid this")
epoch
train_loss
valid_loss
rmse
rmse_gap
r2
r2_gap
time
0
-108.023250
-113.113034
0.073622
0.198386
0.956828
-105578612000578019271909572608.000000
02:37
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
1.1440
-0.0057
0.0107
0.8262
-0.0845
-0.1205
x_1
-0.0534
1.2089
-0.0223
0.0948
0.7937
0.0494
x_2
-0.0289
0.0176
1.1417
-0.0702
-0.1154
0.8351
x_3
0.1277
0.0589
0.1074
1.1787
-0.1203
-0.1024
x_4
0.0654
0.1560
0.0043
-0.1781
1.1087
-0.1415
x_5
0.0959
0.0439
0.1197
-0.1725
-0.0340
1.1496
trans cov (Q)
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
0.0207
0.0254
0.0210
-0.0052
-0.0232
-0.0033
x_1
0.0254
0.0548
0.0255
0.0259
0.0034
0.0281
x_2
0.0210
0.0255
0.0222
-0.0056
-0.0241
-0.0023
x_3
-0.0052
0.0259
-0.0056
0.0473
0.0504
0.0465
x_4
-0.0232
0.0034
-0.0241
0.0504
0.0695
0.0473
x_5
-0.0033
0.0281
-0.0023
0.0465
0.0473
0.0477
trans off
state
offset
x_0
0.0004
x_1
-0.0055
x_2
0.0048
x_3
0.0027
x_4
-0.0033
x_5
0.0021
obs matrix (H)
variable
x_0
x_1
x_2
x_3
x_4
x_5
y_0
0.7902
-0.2032
-0.1217
-0.0986
0.2164
0.1090
y_1
0.0160
0.7417
0.0555
0.1016
0.1041
0.0374
y_2
-0.1103
-0.1594
0.7916
0.1185
0.2096
-0.0949
obs cov (R)
variable
y_0
y_1
y_2
y_0
0.0077
0.0000
0.0000
y_1
0.0000
0.0077
0.0000
y_2
0.0000
0.0000
0.0077
obs off
variable
offset
y_0
-0.0009
y_1
-0.0053
y_2
-0.0021
contr matrix (B)
state
c_0
c_1
c_2
c_3
c_4
c_5
x_0
-1.0277
-0.0595
-0.0019
0.9181
0.0838
-0.0194
x_1
-0.0414
-0.8661
-0.0323
0.0110
0.8741
0.0262
x_2
-0.0029
-0.0647
-1.0045
-0.0319
0.0757
0.8957
x_3
-0.0678
-0.0368
-0.0564
-0.0559
-0.0610
-0.0639
x_4
-0.0374
-0.0690
-0.0209
-0.0485
-0.0406
-0.0354
x_5
-0.0591
-0.0267
-0.0753
-0.0633
-0.0407
-0.0555
init state mean
state
mean
x_0
-0.0396
x_1
-0.0009
x_2
-0.0049
x_3
0.0153
x_4
-0.0139
x_5
-0.0094
init state cov
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
2.6364
-0.1456
-0.0020
0.5695
0.3986
0.1628
x_1
-0.1456
2.3292
0.1064
0.2074
0.7591
0.1475
x_2
-0.0020
0.1064
2.4205
0.3489
0.1488
0.6143
x_3
0.5695
0.2074
0.3489
2.2351
-0.4355
-0.3277
x_4
0.3986
0.7591
0.1488
-0.4355
2.1625
-0.3445
x_5
0.1628
0.1475
0.6143
-0.3277
-0.3445
2.2518
# learn.save("17_jan_all_gaps")
Path('models/17_jan_all_gaps.pth')
# Train for 2 epochs at lr=1e-3, then display/persist results.
# NOTE(review): `train_show_save` and `learn` are defined in an earlier
# cell not visible here — presumably (epochs, learning_rate); confirm.
train_show_save(learn, 2, 1e-3)
epoch
train_loss
valid_loss
rmse
rmse_gap
r2
r2_gap
time
0
-122.620241
-126.621236
0.077944
0.202366
0.948695
-115274990280631988956159803392.000000
02:38
1
-134.455010
-139.768441
0.074152
0.188695
0.959962
-103653315974387814635399020544.000000
02:31
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
1.1347
-0.0147
0.0100
0.8021
-0.1122
-0.1444
x_1
-0.0388
1.2480
-0.0185
0.0773
0.7576
0.0414
x_2
-0.0257
-0.0018
1.1500
-0.0814
-0.1272
0.8203
x_3
0.1332
0.0451
0.0866
1.1612
-0.1103
-0.0917
x_4
0.0559
0.1720
0.0056
-0.1625
1.1052
-0.1248
x_5
0.0721
0.0457
0.1226
-0.1523
-0.0385
1.1357
trans cov (Q)
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
0.0146
0.0183
0.0142
-0.0081
-0.0202
-0.0067
x_1
0.0183
0.0445
0.0187
0.0148
-0.0014
0.0168
x_2
0.0142
0.0187
0.0143
-0.0065
-0.0187
-0.0044
x_3
-0.0081
0.0148
-0.0065
0.0340
0.0393
0.0343
x_4
-0.0202
-0.0014
-0.0187
0.0393
0.0550
0.0378
x_5
-0.0067
0.0168
-0.0044
0.0343
0.0378
0.0361
trans off
state
offset
x_0
0.0014
x_1
-0.0059
x_2
0.0039
x_3
0.0016
x_4
-0.0036
x_5
0.0032
obs matrix (H)
variable
x_0
x_1
x_2
x_3
x_4
x_5
y_0
0.7310
-0.1642
-0.0983
-0.1030
0.2236
0.0907
y_1
0.0158
0.6777
0.0705
0.1196
0.1642
0.0316
y_2
-0.1124
-0.1249
0.7399
0.1211
0.2193
-0.0846
obs cov (R)
variable
y_0
y_1
y_2
y_0
0.0064
0.0000
0.0000
y_1
0.0000
0.0065
0.0000
y_2
0.0000
0.0000
0.0065
obs off
variable
offset
y_0
-0.0019
y_1
-0.0046
y_2
-0.0026
contr matrix (B)
state
c_0
c_1
c_2
c_3
c_4
c_5
x_0
-1.0316
-0.0708
-0.0019
0.8851
0.0973
-0.0094
x_1
-0.0727
-0.8659
-0.0584
0.0075
0.7964
0.0142
x_2
0.0098
-0.0734
-1.0045
-0.0156
0.0845
0.8489
x_3
-0.0977
-0.0434
-0.0574
-0.0723
-0.0760
-0.0730
x_4
-0.0309
-0.1114
-0.0185
-0.0559
-0.0634
-0.0420
x_5
-0.0503
-0.0424
-0.1056
-0.0644
-0.0571
-0.0729
init state mean
state
mean
x_0
-0.0585
x_1
-0.0141
x_2
-0.0070
x_3
0.0239
x_4
-0.0043
x_5
-0.0069
init state cov
state
x_0
x_1
x_2
x_3
x_4
x_5
x_0
2.9257
0.1245
0.1987
0.4475
0.2435
0.0893
x_1
0.1245
2.4393
0.3641
0.2107
0.7802
0.1895
x_2
0.1987
0.3641
2.4862
0.2959
0.0089
0.6053
x_3
0.4475
0.2107
0.2959
2.1058
-0.5771
-0.4644
x_4
0.2435
0.7802
0.0089
-0.5771
1.9818
-0.5064
x_5
0.0893
0.1895
0.6053
-0.4644
-0.5064
2.0952
# Continue training: 2 more epochs at lr=1e-3 on the same learner.
# NOTE(review): helper defined in an earlier cell — assumed signature
# (learner, epochs, learning_rate); confirm against its definition.
train_show_save(learn, 2, 1e-3)
epoch
train_loss
valid_loss
rmse
rmse_gap
r2
r2_gap
time
0
-149.288297
-152.804403
0.072760
0.179190
0.956827
-61340364986143763995601928192.000000
02:35
1
-160.099637
-165.002097
0.071263
0.181697
0.963587
-48387608947773502947627892736.000000
02:37
# learn.save("17_jan_all_gaps_final")
Path('models/17_jan_all_gaps_final.pth')
# Plot predictions for a hand-picked subset of items (indices 0, 5, 4),
# conditioning on the control series `hai_control`.
# NOTE(review): `show_results`, `learn`, `hai_control`, and `items` come
# from earlier cells not shown here — verify what `p` holds (figure?).
p = show_results(learn, control=hai_control, items = [items[i] for i in [0,5,4]])